
Conversation

@maleadt (Member) commented Jul 26, 2025

No description provided.

github-actions bot commented Jul 26, 2025

Your PR requires formatting changes to meet the project's style guidelines.
Please consider running Runic (`git runic main`) to apply these changes.

Suggested changes:
diff --git a/src/enum.jl b/src/enum.jl
index 1efd14d..0775182 100644
--- a/src/enum.jl
+++ b/src/enum.jl
@@ -21,7 +21,7 @@ macro enum_without_prefix(enum, prefix)
     for instance in instances(enum)
         name = String(Symbol(instance))
         @assert startswith(name, prefix)
-        push!(ex.args, :(const $(Symbol(name[length(prefix)+1:end])) = $(mod).$(Symbol(name))))
+        push!(ex.args, :(const $(Symbol(name[(length(prefix) + 1):end])) = $(mod).$(Symbol(name))))
     end
 
     return esc(ex)
diff --git a/src/memoization.jl b/src/memoization.jl
index bfff8af..2013272 100644
--- a/src/memoization.jl
+++ b/src/memoization.jl
@@ -16,7 +16,7 @@ secondary cache will be a vector with length `maxlen`. Otherwise, a dictionary i
 """
 macro memoize(ex...)
     code = ex[end]
-    args = ex[1:end-1]
+    args = ex[1:(end - 1)]
 
     # decode the code body
     @assert Meta.isexpr(code, :(::))
@@ -28,7 +28,7 @@ macro memoize(ex...)
     if length(args) >= 1
         arg = args[1]
         @assert Meta.isexpr(arg, :(::))
-        key = (val=arg.args[1], typ=arg.args[2])
+        key = (val = arg.args[1], typ = arg.args[2])
     end
     options = Dict()
     for arg in args[2:end]
@@ -41,13 +41,13 @@ macro memoize(ex...)
     @gensym global_cache
 
     # in the presence of thread adoption, we need to use the maximum thread ID
-    nthreads = :( Threads.maxthreadid() )
+    nthreads = :(Threads.maxthreadid())
 
     # generate code to access memoized values
     # (assuming the global_cache can be indexed with the thread ID)
     if key === nothing
         # if we don't have to key on anything, use the global cache directly
-        global_cache_eltyp = :(Union{Nothing,$rettyp})
+        global_cache_eltyp = :(Union{Nothing, $rettyp})
         ex = quote
             cache = get!($(esc(global_cache))) do
                 $global_cache_eltyp[nothing for _ in 1:$nthreads]
@@ -63,8 +63,8 @@ macro memoize(ex...)
         end
     elseif haskey(options, :maxlen)
         # if we know the length of the cache, use a fixed-size array
-        global_cache_eltyp = :(Vector{Union{Nothing,$rettyp}})
-        global_init = :(Union{Nothing,$rettyp}[nothing for _ in 1:$(esc(options[:maxlen]))])
+        global_cache_eltyp = :(Vector{Union{Nothing, $rettyp}})
+        global_init = :(Union{Nothing, $rettyp}[nothing for _ in 1:$(esc(options[:maxlen]))])
         ex = quote
             cache = get!($(esc(global_cache))) do
                 $global_cache_eltyp[$global_init for _ in 1:$nthreads]
@@ -85,8 +85,8 @@ macro memoize(ex...)
         end
     else
         # otherwise, fall back to a dictionary
-        global_cache_eltyp = :(Dict{$(key.typ),$rettyp})
-        global_init = :(Dict{$(key.typ),$rettyp}())
+        global_cache_eltyp = :(Dict{$(key.typ), $rettyp})
+        global_init = :(Dict{$(key.typ), $rettyp}())
         ex = quote
             cache = get!($(esc(global_cache))) do
                 $global_cache_eltyp[$global_init for _ in 1:$nthreads]
@@ -114,7 +114,7 @@ macro memoize(ex...)
         end
     end
 
-    quote
+    return quote
         $ex
     end
 end
diff --git a/src/threading.jl b/src/threading.jl
index 91ab04d..a06b1f7 100644
--- a/src/threading.jl
+++ b/src/threading.jl
@@ -10,7 +10,7 @@ This type is intended for lazy initialization of e.g. global structures, without
 `__init__`. It is similar to protecting accesses using a lock, but is much cheaper.
 
 """
-struct LazyInitialized{T,F}
+struct LazyInitialized{T, F}
     # 0: uninitialized
     # 1: initializing
     # 2: initialized
@@ -21,8 +21,8 @@ struct LazyInitialized{T,F}
     validator::F
 end
 
-LazyInitialized{T}(validator=nothing) where {T} =
-    LazyInitialized{T,typeof(validator)}(Threads.Atomic{Int}(0), Ref{T}(), validator)
+LazyInitialized{T}(validator = nothing) where {T} =
+    LazyInitialized{T, typeof(validator)}(Threads.Atomic{Int}(0), Ref{T}(), validator)
 
 @inline function Base.get!(constructor::Base.Callable, x::LazyInitialized)
     while x.guard[] != 2
@@ -48,11 +48,11 @@ end
     status = Threads.atomic_cas!(x.guard, 0, 1)
     if status == 0
         try
-          x.value[] = constructor()::T
-          x.guard[] = 2
+            x.value[] = constructor()::T
+            x.guard[] = 2
         catch
-          x.guard[] = 0
-          rethrow()
+            x.guard[] = 0
+            rethrow()
         end
     else
         ccall(:jl_cpu_suspend, Cvoid, ())
diff --git a/test/runtests.jl b/test/runtests.jl
index 3c37a44..99b10b7 100644
--- a/test/runtests.jl
+++ b/test/runtests.jl
@@ -80,9 +80,9 @@ using IOCapture
 
     @testset "@enum_without_prefix" begin
         mod = @eval module $(gensym())
-            using GPUToolbox
-            @enum MY_ENUM MY_ENUM_VALUE
-            @enum_without_prefix MY_ENUM MY_
+        using GPUToolbox
+        @enum MY_ENUM MY_ENUM_VALUE
+        @enum_without_prefix MY_ENUM MY_
         end
 
         @test mod.ENUM_VALUE == mod.MY_ENUM_VALUE
@@ -151,7 +151,7 @@ using IOCapture
         # Test memoization with maxlen (vector)
         vec_call_count = Ref(0)
         function test_vec_memo(x)
-            @memoize x::Int maxlen=10 begin
+            @memoize x::Int maxlen = 10 begin
                 vec_call_count[] += 1
                 x * 3
             end::Int

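For context only (not part of the suggested diff): a minimal sketch of how the `@memoize` macro touched above appears to be invoked, pieced together from the docstring fragment and the test in this diff. The function names are hypothetical, and it assumes the macro is available via `using GPUToolbox`, as the test file suggests.

using GPUToolbox  # assumption: @memoize is exported, as the test file suggests

# No key: one cached value per thread.
function default_device()
    @memoize begin
        # expensive one-time discovery would go here
        0
    end::Int
end

# Keyed on an Int with a known bound: per the docstring, a fixed-size vector
# cache of length `maxlen` is used (the key is assumed to index into it, so it
# should fall in 1:maxlen); without `maxlen`, a Dict-backed cache is used.
function triple(i)
    @memoize i::Int maxlen = 10 begin
        i * 3
    end::Int
end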
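Likewise, a hedged sketch of the `LazyInitialized` pattern whose indentation is fixed above, based on its docstring and the `Base.get!` method shown in the diff; the global name and the constructor body are illustrative, not from this PR.

using GPUToolbox  # assumption: LazyInitialized is exported

# A global that should be constructed lazily rather than in __init__.
const context_cache = LazyInitialized{Vector{Int}}()

function context()
    # get! runs the do-block at most once; concurrent callers spin on the
    # atomic guard until the value is published, which the docstring notes
    # is cheaper than taking a lock on every access.
    get!(context_cache) do
        Int[]  # expensive setup would go here
    end
end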
@maleadt force-pushed the tb/more branch 2 times, most recently from d4cdeeb to e4e3fb0 on July 26, 2025 at 15:49
@maleadt merged commit 3aa53d2 into main on Jul 26, 2025
14 checks passed
@maleadt deleted the tb/more branch on July 26, 2025 at 16:18
@christiangnrd (Member) commented:
Did you mean to commit the test manifest?

@maleadt (Member, Author) commented Jul 26, 2025

Ah no, good catch.
